function [Z,Lambda]=kernelpca(X,T,l,ker,arg,display)
% KERNELPCA computes kernel Principal Component Analysis.
%  [Z,Lambda]=kernelpca(X,T,l,ker,arg,display)
%
% KERNELPCA computes Principal Component Analysis (PCA)
%  of non-linearly mapped data. The non-linear feature
%  space is determined by a kernel function.
%  The PCA projection is used to reduce the dimension
%  of the non-linearly mapped data to an l-dimensional space.
%
%  The kernel-PCA projection is learnt from the data in
%  the matrix T. The data in the matrix X are processed
%  by the learnt non-linear projection and the result is
%  returned in the matrix Z.
%
% Input:
%   X [dxL] contains the L points (d-dimensional) to be processed.
%   T [dxN] contains the N training points in d-dimensional space.
%   l [1x1] dimension of the reduced (output) space.
%   ker [string] determines the non-linear mapping, see help kernel.
%   arg [] arguments of the used kernel.
%   display [1x1] if display==1 then progress info is displayed.
%
% Output:
%   Z [lxL] the L processed points in the l-dimensional space.
%   Lambda [Nx1] eigenvalues of the covariance matrix of the
%     non-linearly mapped training points.
%
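% Example:
%  (a minimal sketch; it assumes kernel.m accepts the kernel name
%   'rbf' with a scalar width argument, see help kernel)
%   T = randn(2,100);                        % training points
%   X = randn(2,10);                         % points to be projected
%   [Z,Lambda] = kernelpca(X,T,2,'rbf',1,1); % project onto 2 components
%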
% See also SPCA, KERNEL, PKERNELPCA.
%   

% Statistical Pattern Recognition Toolbox, Vojtech Franc, Vaclav Hlavac
% (c) Czech Technical University Prague, http://cmp.felk.cvut.cz
% Modifications
% 11-july-2002, VF, mistake "Jt=zeros(N,L)/N" repaired
%              (reported by SH_Srinivasan@Satyam.com).
% 5-July-2001, V.Franc, comments changed
% 20-dec-2000, V.Franc, algorithm was implemented

if nargin < 6,
  display=0;
end

d=size(X,1);  % dimension of the input space
N=size(T,2);  % number of training points
L=size(X,2);  % number of points to be processed

% Compute kernel matrix for training data
if display==1,
  disp('Computing kernel matrix for training data...');
end
K=kernel(T,T,ker,arg);
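% K(i,j) = k(T(:,i),T(:,j)) is the dot product of the i-th and j-th
% mapped training points in the (implicit) feature space.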

% Center the kernel matrix, i.e., center the non-linearly mapped
% training data in the feature space.
J = ones(N,N)/N;
K = K - J*K - K*J + J*K*J;
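% (Equivalently, K <- (I-J)*K*(I-J): the Gram matrix of the mapped
% points after their feature-space mean has been subtracted.)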

% Computing the eigenvectors and eigenvalues of K/N.
if display==1, 
   disp('Computing eigenvalue decomposition...');
end

[U,D]=eig(K/N);
Lambda=real(diag(D));
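% The columns of U are the expansion coefficients of the principal
% axes in the feature space; real() guards against small imaginary
% parts introduced by numerical round-off in eig.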

% Normalize the eigenvectors so that the corresponding principal
% axes in the feature space have unit norm. Since eig does not sort
% the eigenvalues and up to N of them can be non-zero in the feature
% space, every column with a positive eigenvalue is normalized.
for k=1:N,
  if Lambda(k) > 0,
     U(:,k)=U(:,k)/sqrt(Lambda(k));
  end
end

% Sort the eigenvalues and the eigenvectors in descending order.
[Lambda,ordered]=sort(-Lambda);    % sort eigenvalues
Lambda=-Lambda;
U=U(:,ordered);                    % sort eigenvectors
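% The first columns of U now correspond to the largest eigenvalues,
% i.e., to the feature-space directions of largest variance.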


% Compute kernel matrix for feature extraction
if display==1,
  disp('Computing kernel matrix for feature extraction...');
end
Kt = kernel(X,T,ker,arg);

% Center the test kernel matrix with respect to the training data.
Jt=ones(N,L)/N;
Kt=Kt - Jt'*K - Kt*J + Jt'*K*J;
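% The mapped test points are centered using the training mean:
%   k_c(x,t_i) = k(x,t_i) - (1/N)*sum_m k(x,t_m)
%                - (1/N)*sum_m k(t_m,t_i) + (1/N^2)*sum_{m,n} k(t_m,t_n)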

% Feature extraction: project onto the first l principal axes.
A=U(:,1:l);              % expansion coefficients of the first l axes
Z=(Kt*A)';
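% Row k of Z holds the k-th non-linear principal component of each
% input point: Z(k,i) = sum_j A(j,k)*k_c(X(:,i),T(:,j)).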

return;